miχpods: MOM6
Contents
miχpods: MOM6#
baseline KPP missing
bootstrap error on mean, median?
Try daily vs hourly
add TAO χpods
fix EUC max at 110
figure out time base
check eps vs Ri histogram
I think my Ri is too low?
Questions:
N2 vs N2T
match time intervals
match vertical resolution and extent
restrict TAO S2 to ADCP depth range
restrict to top 200m.
Notes:
Filtering out MLD makes a big difference!
Since we’re working with derivatives, does the vertical grid matter (as long as it is coarser than the observations)?
1 difvho ocean_vertical_heat_diffusivity
2 difvso ocean_vertical_salt_diffusivity
Need lateral / neutral terms for total χ, ε
Why can’t I reproduce the ε figure?
Why can’t I save the new station data
Need to consider Ri smoothing in models: From Gustavo:
You mentioned some kind of inconsistency between your diagnostics and what MOM6 was doing for the interior shear mixing scheme. I am working to implement the option to apply vertical smoothing in Ri multiple times, instead of only once which is what we are doing right now. I noticed that the diagnostic
ri_grad_shear is saved before the smoothing is applied. There is another diagnostic (ri_grad_shear_smooth) that saves the smoothed Ri. Perhaps you were looking at ri_grad_shear instead of ri_grad_shear_smooth and this can explain the inconsistency.
Diagnostics notes#
go through
prepare: pass in dataset and optional xgcm.Grid; different pre-processing steps for different datasets
order, sign of z coordinate is painful; here normalizing
composing with matplotlib subfigures
References#
Warner & Moum (2019)
Setup#
%load_ext watermark
import datetime
import glob
import os
import warnings
import cf_xarray as cfxr
import dask
import dask_jobqueue
import dcpy
import distributed
import flox.xarray
import holoviews as hv
import hvplot.xarray
import matplotlib as mpl
import matplotlib.pyplot as plt
import mom6_tools
import numpy as np
import pandas as pd
import xarray as xr
import xgcm
from holoviews import opts
%aimport pump
from pump import mixpods
# Use the Bokeh backend for holoviews/hvplot interactive figures.
hv.notebook_extension("bokeh")
# Silence dask's "splitting large chunks" performance warning for this analysis.
dask.config.set({"array.slicing.split_large_chunks": False})
plt.style.use("bmh")
plt.rcParams["figure.dpi"] = 140
# Propagate attrs through xarray operations; keep dataset reprs compact.
xr.set_options(keep_attrs=True, display_expand_data=False)
gcmdir = "/glade/campaign/cgd/oce/people/bachman/TPOS_1_20_20_year/OUTPUT/" # MITgcm output directory
stationdirname = gcmdir
# "|"-separated candidate variable names for the boundary-layer depth across
# models (MITgcm KPPhbl, MOM6 KPP_OBLdepth, MOM6 ePBL_h_ML).
mixing_layer_depth_criteria = {
"boundary_layer_depth": {"name": "KPPhbl|KPP_OBLdepth|ePBL_h_ML"},
}
%watermark -iv
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask_jobqueue/core.py:20: FutureWarning: tmpfile is deprecated and will be removed in a future release. Please use dask.utils.tmpfile instead.
from distributed.utils import tmpfile
dcpy : 0.1.dev385+g121534c
dask : 2022.11.1
sys : 3.10.8 | packaged by conda-forge | (main, Nov 22 2022, 08:26:04) [GCC 10.4.0]
dask_jobqueue: 0.7.3
pump : 0.1
pandas : 1.5.2
json : 2.0.9
flox : 0.6.4
mom6_tools : 0.0.post244
xarray : 2022.11.0
xgcm : 0.6.1
numpy : 1.23.5
matplotlib : 3.6.2
holoviews : 1.15.2
cf_xarray : 0.7.5
hvplot : 0.8.2
distributed : 2022.11.1
# Tear down any dask client/cluster left over from a previous run of this cell
# so we don't leak PBS jobs.
if "client" in locals():
client.close()
del client
if "cluster" in locals():
cluster.close()
# env = {"OMP_NUM_THREADS": "3", "NUMBA_NUM_THREADS": "3"}
# cluster = distributed.LocalCluster(
# n_workers=8,
# threads_per_worker=1,
# env=env
# )
if "cluster" in locals():
del cluster
# cluster = ncar_jobqueue.NCARCluster(
# project="NCGD0011",
# scheduler_options=dict(dashboard_address=":9797"),
# )
# cluster = dask_jobqueue.PBSCluster(
# cores=9, processes=9, memory="108GB", walltime="02:00:00", project="NCGD0043",
# env_extra=env,
# )
# Single-core, single-process PBS workers on Casper's "casper" queue.
cluster = dask_jobqueue.PBSCluster(
cores=1, # The number of cores you want
memory="23GB", # Amount of memory
processes=1, # How many processes
queue="casper", # The type of queue to utilize (/glade/u/apps/dav/opt/usr/bin/execcasper)
# log_directory="/glade/scratch/dcherian/dask/", # Use your local directory
resource_spec="select=1:ncpus=1:mem=23GB", # Specify resources
project="ncgd0011", # Input your project ID here
walltime="02:00:00", # Amount of wall time
interface="ib0", # Interface to use
)
# Adaptively scale between 3 and 24 PBS jobs depending on load.
cluster.adapt(maximum_jobs=24, minimum_jobs=3)
client = distributed.Client(cluster)
client
Client
Client-f9c48d0b-7199-11ed-84ca-3cecef1ac722
| Connection method: Cluster object | Cluster type: dask_jobqueue.PBSCluster |
| Dashboard: https://jupyterhub.hpc.ucar.edu/stable/user/dcherian/proxy/8787/status |
Cluster Info
PBSCluster
0664a41a
| Dashboard: https://jupyterhub.hpc.ucar.edu/stable/user/dcherian/proxy/8787/status | Workers: 0 |
| Total threads: 0 | Total memory: 0 B |
Scheduler Info
Scheduler
Scheduler-96606105-683c-47d6-81eb-7705dc1aedaa
| Comm: tcp://10.12.206.36:46132 | Workers: 0 |
| Dashboard: https://jupyterhub.hpc.ucar.edu/stable/user/dcherian/proxy/8787/status | Total threads: 0 |
| Started: Just now | Total memory: 0 B |
Workers
Read data#
TAO#
tao_Ri = xr.load_dataarray(
"tao-hourly-Ri-seasonal-percentiles.nc"
).cf.guess_coord_axis()
tao_gridded = xr.open_dataset(
os.path.expanduser("~/work/pump/zarrs/tao-gridded-ancillary.zarr"),
chunks="auto",
engine="zarr",
).sel(longitude=-140, time=slice("1996", None))
tao_gridded["depth"].attrs["axis"] = "Z"
tao_gridded["shallowest"].attrs.clear()
chipod = (
dcpy.oceans.read_cchdo_chipod_file(
"~/datasets/microstructure/osu/chipods_0_140W.nc"
)
.sel(time=slice("2015"))
# move from time on the half hour to on the hour
.coarsen(time=2, boundary="trim")
.mean()
# "Only χpods between 29 and 69 m are used in this analysis as
# deeper χpods are more strongly influenced by the variability of zEUC than by surface forcing."
# - Warner and Moum (2019)
.sel(depth=slice(29, 69))
.reindex(time=tao_gridded.time, method="nearest", tolerance="5min")
.pipe(mixpods.normalize_z, sort=True)
)
tao_gridded = tao_gridded.update(
chipod[["chi", "KT", "eps", "Jq"]]
.reset_coords(drop=True)
.rename({"depth": "depthchi"})
)
tao_gridded = mixpods.prepare(tao_gridded, oni=pump.obs.process_oni())
tao_gridded = tao_gridded.update(
mixpods.pdf_N2S2(tao_gridded.drop_vars(["shallowest", "zeuc"]))
)
tao_gridded = mixpods.load(tao_gridded)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/core/dataset.py:247: UserWarning: The specified Dask chunks separate the stored chunks along dimension "depth" starting at index 42. This could degrade performance. Instead, consider rechunking after loading.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/core/dataset.py:247: UserWarning: The specified Dask chunks separate the stored chunks along dimension "time" starting at index 199726. This could degrade performance. Instead, consider rechunking after loading.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/core/dataset.py:247: UserWarning: The specified Dask chunks separate the stored chunks along dimension "longitude" starting at index 2. This could degrade performance. Instead, consider rechunking after loading.
warnings.warn(
# Monthly-mean log10(ε) at 140°W, 2005–2015.
np.log10(tao_gridded.eps).sel(time=slice("2005", "2015")).resample(
time="M"
).mean().hvplot.quadmesh(clim=(-7.5, -6))
# Sanity-check the temperature-based MLD estimate against one month of data.
sub = tao_gridded.sel(time="2010-01")
t = sub.theta.hvplot.quadmesh(cmap="turbo_r")
# Temperature anomaly relative to the near-surface (0 to -5 m) maximum.
dt = (
sub.theta - sub.theta.reset_coords(drop=True).cf.sel(Z=[0, -5]).cf.max("Z")
).hvplot.quadmesh(clim=(-0.15, 0.15), cmap="RdBu_r")
newmld = mixpods.get_mld_tao_theta(sub.theta.reset_coords(drop=True))
# Overlay stored MLD (white) and recomputed MLD (orange) on the anomaly field.
(
dt
* sub.reset_coords().mldT.hvplot.line(color="w", line_width=2)
* newmld.reset_coords(drop=True).hvplot.line(color="orange", line_width=1)
).opts(frame_width=1200)
# Same comparison over the full record, 5-day averaged.
(
tao_gridded.reset_coords().mldT.resample(time="5D").mean().hvplot.line()
* mixpods.get_mld_tao_theta((tao_gridded.reset_coords().theta))
.resample(time="5D")
.mean()
.hvplot.line()
)
tao_gridded.u.cf.plot()
tao_gridded.eucmax.plot()
[<matplotlib.lines.Line2D at 0x2b38c6a4a8c0>]
MITgcm stations#
# Read MITgcm "station" (single-column time series) output.
stations = pump.model.read_stations_20(stationdirname)
# Keep only the equatorial stations nearest the four TAO longitudes.
gcmeq = stations.sel(
longitude=[-155.025, -140.025, -125.025, -110.025], method="nearest"
)
# enso = pump.obs.make_enso_mask()
# mitgcm["enso"] = enso.reindex(time=mitgcm.time.data, method="nearest")
# gcmeq["eucmax"] = pump.calc.get_euc_max(gcmeq.u)
# pump.calc.calc_reduced_shear(gcmeq)
# oni = pump.obs.process_oni()
# gcmeq["enso_transition"] = mixpods.make_enso_transition_mask(oni).reindex(time=gcmeq.time.data, method="nearest")
# Work with the 140°W station from here on.
mitgcm = gcmeq.sel(longitude=-140.025, method="nearest")
# Vertical grid metrics; normalize_z standardizes the sign/ordering of the
# vertical coordinate (see notebook notes on z-coordinate conventions).
metrics = mixpods.normalize_z(pump.model.read_metrics(stationdirname), sort=True)
mitgcm = mixpods.normalize_z(mitgcm, sort=True)
# xgcm grid: cell centers on "depth", interfaces on "RF"; drF/drC are the
# vertical spacing metrics.
mitgcm_grid = xgcm.Grid(
metrics.sel(latitude=mitgcm.latitude, longitude=mitgcm.longitude, method="nearest"),
coords=({"Z": {"center": "depth", "outer": "RF"}, "Y": {"center": "latitude"}}),
metrics={"Z": ("drF", "drC")},
periodic=False,
boundary="fill",
fill_value=np.nan,
)
# CF standard names so cf-xarray accessors can locate these variables.
mitgcm.theta.attrs["standard_name"] = "sea_water_potential_temperature"
mitgcm.salt.attrs["standard_name"] = "sea_water_salinity"
mitgcm["KPPviscAz"].attrs["standard_name"] = "ocean_vertical_viscosity"
# Prepare diagnostics, then restrict to the equator and the top 250 m.
mitgcm = (
mixpods.prepare(mitgcm, grid=mitgcm_grid, oni=pump.obs.process_oni())
.sel(latitude=0, method="nearest")
.cf.sel(Z=slice(-250, 0))
)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/array/core.py:4806: PerformanceWarning: Increasing number of chunks by factor of 37
result = blockwise(
# Add the joint N²–S² histograms and pull everything into memory.
mitgcm = xr.merge([mitgcm, mixpods.pdf_N2S2(mitgcm)])
mitgcm = mixpods.load(mitgcm)
mitgcm
<xarray.Dataset>
Dimensions: (depth: 100, time: 174000, RF: 101, N2T_bins: 29,
S2_bins: 29, enso_transition_phase: 7, stat: 2,
N2_bins: 29, Rig_T_bins: 9)
Coordinates: (12/20)
* depth (depth) float32 -248.8 -246.2 -243.8 ... -3.75 -1.25
latitude float64 0.025
longitude float64 -140.0
* time (time) datetime64[ns] 1998-12-31T18:00:00 ... 2018...
* RF (RF) float64 -250.0 -247.5 -245.0 ... -5.0 -2.5 0.0
eucmax (time) float32 dask.array<chunksize=(162,), meta=np.ndarray>
... ...
* S2_bins (S2_bins) object [-5.0, -4.9) ... [-2.200000000000...
* enso_transition_phase (enso_transition_phase) object 'none' ... 'all'
* stat (stat) object 'mean' 'count'
* N2_bins (N2_bins) object [-5.0, -4.9) ... [-2.200000000000...
bin_areas (N2T_bins, S2_bins) float64 0.01 0.01 ... 0.01 0.01
* Rig_T_bins (Rig_T_bins) object (-1.6, -1.4000000000000001] .....
Data variables: (12/31)
DFrI_TH (depth, time) float32 dask.array<chunksize=(100, 6000), meta=np.ndarray>
KPPdiffKzT (depth, time) float32 dask.array<chunksize=(100, 6000), meta=np.ndarray>
KPPg_TH (depth, time) float32 dask.array<chunksize=(100, 6000), meta=np.ndarray>
KPPhbl (time) float32 dask.array<chunksize=(6000,), meta=np.ndarray>
KPPviscAz (depth, time) float32 dask.array<chunksize=(100, 6000), meta=np.ndarray>
SSH (time) float32 dask.array<chunksize=(6000,), meta=np.ndarray>
... ...
Rig_T (RF, time) float32 dask.array<chunksize=(100, 6000), meta=np.ndarray>
Rig (RF, time) float32 dask.array<chunksize=(100, 6000), meta=np.ndarray>
eps (RF, time) float32 dask.array<chunksize=(100, 6000), meta=np.ndarray>
n2s2pdf (N2T_bins, S2_bins, enso_transition_phase) float64 ...
eps_n2s2 (stat, N2_bins, S2_bins, enso_transition_phase) float64 ...
eps_ri (stat, Rig_T_bins, enso_transition_phase) float64 ...
Attributes:
easting: longitude
northing: latitude
title: Station profile, index (i,j)=(1201,240)
mitgcm.u.cf.plot()
# MLD and EUC-core depth time series for the MITgcm station.
mitgcm.mldT.reset_coords(drop=True).cf.plot()
mitgcm.eucmax.reset_coords(drop=True).cf.plot()
[<matplotlib.lines.Line2D at 0x2ae9f91ebdc0>]
# Joint N²–S² PDF outside ENSO transition phases.
mixpods.plot_n2s2pdf(mitgcm.n2s2pdf.sel(enso_transition_phase="none"))
<matplotlib.contour.QuadContourSet at 0x2ae9b69f4ac0>
Error#
# Reproduce the flox reduction with random data matching the model output's
# shape and chunking.
shape = (101, 174000)
dims = ("RF", "time")
eps = mitgcm.eps.copy(
    data=dask.array.random.random_sample(shape, chunks=((100, 1), 8760))
)
# DataArray.copy() does not accept a ``name`` keyword (see the TypeError in
# the traceback below); rename after copying instead.
N2 = mitgcm.N2T.copy(
    data=dask.array.random.random_sample(shape, chunks=((100, 1), 8760))
).rename("N2")
S2 = mitgcm.S2.copy(
    data=dask.array.random.random_sample(shape, chunks=((100, 1), 8760))
).rename("S2")
enso_transition = mitgcm.enso_transition
# log10 bin edges shared by N² and S².
bins = np.arange(-5, -2.05, 0.1)
func = ["mean", "std"]
# Bin ε by (N², S², ENSO phase) and reduce with each statistic.
out = [
    flox.xarray.xarray_reduce(
        eps,
        N2,
        S2,
        enso_transition,
        func=f,
        expected_groups=(bins, bins, None),
        isbin=(True, True, False),
    )
    for f in func
]
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
Cell In [100], line 4
2 dims = ("RF", "time")
3 eps = mitgcm.eps.copy(data=dask.array.random.random_sample(shape, chunks=((100,1), 8760)))
----> 4 N2 = mitgcm.N2T.copy(data=dask.array.random.random_sample(shape, chunks=((100,1), 8760)), name="N2")
5 S2 = mitgcm.S2.copy(data=dask.array.random.random_sample(shape, chunks=((100,1), 8760)))
6 enso_transition = mitgcm.enso_transition
TypeError: DataArray.copy() got an unexpected keyword argument 'name'
rng = np.random.default_rng()
mitgcm.eps.shape
(101, 174000)
# Working version of the reproducer: construct plain DataArrays (where
# ``name`` can be passed to the constructor) instead of copying model fields.
# shape = (10, 2000)
# chunks = ((shape[0]-1,1), 10)
shape = (101, 174000)
chunks = ((101,), 8760)
dims = ("RF", "time")
eps = xr.DataArray(
data=dask.array.random.random_sample(shape, chunks=chunks), dims=dims
)
N2 = xr.DataArray(
data=dask.array.random.random_sample(shape, chunks=chunks), dims=dims, name="N2"
)
S2 = xr.DataArray(
data=dask.array.random.random_sample(shape, chunks=chunks), dims=dims, name="S2"
)
# Random 5-category integer labels standing in for the ENSO transition mask.
enso_transition = xr.DataArray(
data=np.arange(5)[rng.integers(low=0, high=5, size=shape[-1])],
dims=dims[1],
name="enso",
)
# log10 bin edges shared by N² and S².
bins = np.arange(-5, -2.05, 0.1)
func = ["mean", "std"]
# Bin ε by (N², S², ENSO phase) and reduce with each statistic.
out = [
flox.xarray.xarray_reduce(
eps,
N2,
S2,
enso_transition,
func=f,
expected_groups=(bins, bins, None),
isbin=(True, True, False),
)
for f in func
]
MOM6#
calculate ONI#
# Compare the Oceanic Niño Index from observations and MOM6
# (oniobs/onimom6 are computed in cells not shown in this excerpt).
(
oniobs.hvplot.line(x="time", label="obs")
* onimom6.hvplot.line(x="time", label="MOM6")
).opts(ylabel="ONI [°C]")
oniobs.enso_transition_mask.plot()
onimom6.enso_transition_mask.plot(color="r")
[<matplotlib.lines.Line2D at 0x2b6d7fd20eb0>]
MOM6 sections#
Combine sections#
# Convert the baseline-KPP case's mooring section output to zarr.
casename = "gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.001.mixpods"
mixpods.mom6_sections_to_zarr(casename)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask_jobqueue/core.py:255: FutureWarning: job_extra has been renamed to job_extra_directives. You are still using it (even if only set to []; please also check config files). If you did not set job_extra_directives yet, job_extra will be respected for now, but it will be removed in a future release. If you already set job_extra_directives, job_extra is ignored and you can remove it.
warnings.warn(warn, FutureWarning)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask_jobqueue/core.py:274: FutureWarning: env_extra has been renamed to job_script_prologue. You are still using it (even if only set to []; please also check config files). If you did not set job_script_prologue yet, env_extra will be respected for now, but it will be removed in a future release. If you already set job_script_prologue, env_extra is ignored and you can remove it.
warnings.warn(warn, FutureWarning)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask_jobqueue/pbs.py:82: FutureWarning: project has been renamed to account as this kwarg was used wit -A option. You are still using it (please also check config files). If you did not set account yet, project will be respected for now, but it will be removed in a future release. If you already set account, project is ignored and you can remove it.
warnings.warn(warn, FutureWarning)
100%|██████████| 21/21 [02:28<00:00, 7.06s/it]
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/coding/times.py:360: FutureWarning: Index.ravel returning ndarray is deprecated; in a future version this will return a view on self.
sample = dates.ravel()[0]
/glade/u/home/dcherian/pump/pump/mixpods.py:844: RuntimeWarning: Converting a CFTimeIndex with dates from a non-standard calendar, 'noleap', to a pandas.DatetimeIndex, which uses dates from the standard calendar. This may lead to subtle errors in operations that depend on the length of time between dates.
# Same conversion for the ePBL case.
casename = "gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.epbl.001.mixpods"
mixpods.mom6_sections_to_zarr(casename)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask_jobqueue/core.py:255: FutureWarning: job_extra has been renamed to job_extra_directives. You are still using it (even if only set to []; please also check config files). If you did not set job_extra_directives yet, job_extra will be respected for now, but it will be removed in a future release. If you already set job_extra_directives, job_extra is ignored and you can remove it.
warnings.warn(warn, FutureWarning)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask_jobqueue/core.py:274: FutureWarning: env_extra has been renamed to job_script_prologue. You are still using it (even if only set to []; please also check config files). If you did not set job_script_prologue yet, env_extra will be respected for now, but it will be removed in a future release. If you already set job_script_prologue, env_extra is ignored and you can remove it.
warnings.warn(warn, FutureWarning)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask_jobqueue/core.py:255: FutureWarning: job_extra has been renamed to job_extra_directives. You are still using it (even if only set to []; please also check config files). If you did not set job_extra_directives yet, job_extra will be respected for now, but it will be removed in a future release. If you already set job_extra_directives, job_extra is ignored and you can remove it.
warnings.warn(warn, FutureWarning)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask_jobqueue/core.py:274: FutureWarning: env_extra has been renamed to job_script_prologue. You are still using it (even if only set to []; please also check config files). If you did not set job_script_prologue yet, env_extra will be respected for now, but it will be removed in a future release. If you already set job_script_prologue, env_extra is ignored and you can remove it.
warnings.warn(warn, FutureWarning)
100%|██████████| 31/31 [01:59<00:00, 3.85s/it]
/glade/u/home/dcherian/pump/pump/mixpods.py:846: RuntimeWarning: Converting a CFTimeIndex with dates from a non-standard calendar, 'noleap', to a pandas.DatetimeIndex, which uses dates from the standard calendar. This may lead to subtle errors in operations that depend on the length of time between dates.
mom6tao["time"] = mom6tao.indexes["time"].to_datetimeindex()
Read sections#
# Route dask computations through the distributed client, then read the
# baseline-KPP section output.
dask.config.set(scheduler=client)
m = mixpods.read_mom6_sections(
"gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.001.mixpods"
)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask_jobqueue/core.py:255: FutureWarning: job_extra has been renamed to job_extra_directives. You are still using it (even if only set to []; please also check config files). If you did not set job_extra_directives yet, job_extra will be respected for now, but it will be removed in a future release. If you already set job_extra_directives, job_extra is ignored and you can remove it.
warnings.warn(warn, FutureWarning)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask_jobqueue/core.py:274: FutureWarning: env_extra has been renamed to job_script_prologue. You are still using it (even if only set to []; please also check config files). If you did not set job_script_prologue yet, env_extra will be respected for now, but it will be removed in a future release. If you already set job_script_prologue, env_extra is ignored and you can remove it.
warnings.warn(warn, FutureWarning)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask_jobqueue/pbs.py:82: FutureWarning: project has been renamed to account as this kwarg was used wit -A option. You are still using it (please also check config files). If you did not set account yet, project will be respected for now, but it will be removed in a future release. If you already set account, project is ignored and you can remove it.
warnings.warn(warn, FutureWarning)
100%|██████████| 21/21 [02:14<00:00, 6.42s/it]
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/coding/times.py:360: FutureWarning: Index.ravel returning ndarray is deprecated; in a future version this will return a view on self.
sample = dates.ravel()[0]
/glade/u/home/dcherian/pump/pump/mixpods.py:847: RuntimeWarning: Converting a CFTimeIndex with dates from a non-standard calendar, 'noleap', to a pandas.DatetimeIndex, which uses dates from the standard calendar. This may lead to subtle errors in operations that depend on the length of time between dates.
mom6tao["time"] = mom6tao.indexes["time"].to_datetimeindex()
/glade/u/home/dcherian/pump/pump/mixpods.py:859: UserWarning: Kv_v not present. Assuming equal to Kv_u
warnings.warn("Kv_v not present. Assuming equal to Kv_u")
# Write the TAO-mooring section to zarr; the averaging/bounds variables are
# dropped first since they don't fit the time chunking. Chunk size of
# 365*24 suggests one year of hourly output per chunk — TODO confirm.
m.drop_vars(["average_DT", "average_T1", "average_T2", "time_bnds"]).chunk(
{"time": 365 * 24}
).to_zarr(
"/glade/scratch/dcherian/archive/gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.001.mixpods/ocn/moorings/tao.zarr",
mode="w",
)
# ePBL boundary-layer depth vs. density-threshold MLD (mlotst) at the equator.
m.sel(yh=0, method="nearest")[["ePBL_h_ML", "mlotst"]].to_array().hvplot.line(
by="variable", x="time"
)
# Diffusivity profiles, one interactive frame per time step.
m.sel(yh=0, method="nearest")[["Kd_heat", "Kd_ePBL"]].to_array().hvplot.line(
by="variable", groupby="time", logy=True, ylim=(1e-6, 1e-1), xlim=(0, 500)
)
# Load the processed section for whichever ``casename`` was set last above.
mom6140 = mixpods.load_mom6_sections(casename).load()
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1471: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1471: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1471: UserWarning: Metric at ('time', 'zi', 'yq', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/cf_xarray/accessor.py:1638: UserWarning: Variables {'areacello'} not found in object but are referred to in the CF attributes.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1471: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1471: UserWarning: Metric at ('time', 'zi', 'yq', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
# Zonal velocity, EUC-core depth, MLD, and the N²–S² PDF for this section.
mom6140.uo.cf.plot(robust=True)
mom6140.eucmax.cf.plot()
mom6140.mldT.cf.plot(lw=0.5, color="k")
mixpods.plot_n2s2pdf(mom6140.n2s2pdf.sel(enso_transition_phase="none"))
<matplotlib.contour.QuadContourSet at 0x2acdb4c75e10>
Pick simulations#
# Re-read the baseline-KPP sections (lazy; loaded later when comparing).
mom6140 = mixpods.load_mom6_sections(
"gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.001.mixpods"
)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1515: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1515: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1515: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1515: UserWarning: Metric at ('time', 'zi', 'yq', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/cf_xarray/accessor.py:1663: UserWarning: Variables {'areacello'} not found in object but are referred to in the CF attributes.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/coding/cftime_offsets.py:1130: FutureWarning: Argument `closed` is deprecated in favor of `inclusive`.
return pd.date_range(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1515: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1515: UserWarning: Metric at ('time', 'zi', 'yq', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
# The ePBL case.
epbl = mixpods.load_mom6_sections(
"gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.epbl.001.mixpods"
)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1515: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1515: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1515: UserWarning: Metric at ('time', 'zi', 'yq', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/cf_xarray/accessor.py:1663: UserWarning: Variables {'areacello'} not found in object but are referred to in the CF attributes.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/coding/cftime_offsets.py:1130: FutureWarning: Argument `closed` is deprecated in favor of `inclusive`.
return pd.date_range(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1515: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1515: UserWarning: Metric at ('time', 'zi', 'yq', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
# Collect all four data sources for side-by-side comparison.
datasets = {
"TAO": tao_gridded,
"MITgcm": mitgcm,
"MOM6 KPP": mixpods.load(mom6140),
"MOM6 ePBL": mixpods.load(epbl),
}
from datatree import DataTree
# One tree node per dataset, restricted to a common 2000–2017 period.
tree = DataTree.from_dict(
{k: v.sel(time=slice("2000", "2017")) for k, v in datasets.items()}
)
# Shift MOM6 KPP timestamps back by 7 h; presumably aligns its time base
# with the other datasets ("figure out time base" in the to-do list) —
# TODO confirm the offset.
tree["MOM6 KPP"].ds["time"] = tree["MOM6 KPP"].ds["time"] - pd.Timedelta("7h")
Verify depth is normalized#
# Plot each dataset's vertical coordinate to confirm the depths share the
# same sign convention and ordering after normalization.
for name, ds in datasets.items():
(ds.cf["sea_water_x_velocity"].cf["Z"].plot(marker=".", ls="none", label=name))
plt.legend()
<matplotlib.legend.Legend at 0x2aed634274c0>
Compare EUC maximum and MLD#
Monthly climatology#
import tqdm
# Eagerly load MLD for every node, with a per-dataset progress bar.
for node in tqdm.tqdm(tree.children):
tree[node]["mldT"] = tree[node]["mldT"].load()
100%|██████████| 4/4 [00:52<00:00, 13.10s/it]
def to_dataset(tree):
    """Combine each child dataset of ``tree`` along a new "node" dimension.

    The resulting "node" coordinate holds the child names, in the tree's
    child order.
    """
    names = list(tree.children.keys())
    combined = xr.concat([child.ds for child in tree.children.values()], dim="node")
    combined["node"] = names
    return combined
# Monthly climatology of MLD and EUC-core depth for every node.
clim = to_dataset(
tree.map_over_subtree(
lambda x: x.reset_coords()[["mldT", "eucmax"]].groupby("time.month").mean()
)
).load()
clim
<xarray.Dataset>
Dimensions: (node: 4, month: 12)
Coordinates:
* month (month) int64 1 2 3 4 5 6 7 8 9 10 11 12
* node (node) <U9 'TAO' 'MITgcm' 'MOM6 KPP' 'MOM6 ePBL'
Data variables:
mldT (node, month) float64 -28.23 -18.01 -14.84 ... -25.31 -32.05 -33.92
eucmax (node, month) float64 -108.0 -104.2 -95.85 ... -119.1 -120.1 -120.9
hv.Layout(
[
hv.Overlay(
[
tree[node]
.ds["mldT"]
.reset_coords(drop=True)
.groupby("time.month")[month]
.hvplot.hist(label=node, legend=True)
.opts(frame_width=150)
for node in tree.children
]
).opts(title=str(month))
for month in np.arange(1, 13)
]
).cols(4)
# Climatology line plots per node, and the full MLD time series against TAO.
(clim.mldT.hvplot(by="node") + clim.eucmax.hvplot(by="node")).cols(1)
mixpods.plot_timeseries(tree, "mldT", obs="TAO")